use std::collections::hashmap::{HashMap, HashSet, Occupied, Vacant};
use term::color::YELLOW;
-use core::{Package, PackageId, Resolve};
+use core::{Package, PackageId, Resolve, PackageSet};
use util::{Config, TaskPool, DependencyQueue, Fresh, Dirty, Freshness};
use util::{CargoResult, Dependency, profile};
tx: Sender<Message>,
rx: Receiver<Message>,
resolve: &'a Resolve,
+ packages: &'a PackageSet,
active: uint,
pending: HashMap<(&'a PackageId, TargetStage), PendingBuild>,
state: HashMap<&'a PackageId, Freshness>,
StageCustomBuild,
StageLibraries,
StageBinaries,
- StageEnd,
+ StageTests,
}
+// Message sent from a worker back to the queue: which package and stage
+// completed, whether its work was fresh or dirty, and the job's outcome.
type Message = (PackageId, TargetStage, Freshness, CargoResult<()>);
impl<'a, 'b> JobQueue<'a, 'b> {
- pub fn new(resolve: &'a Resolve, config: &mut Config) -> JobQueue<'a, 'b> {
+ pub fn new(resolve: &'a Resolve, packages: &'a PackageSet,
+ config: &mut Config) -> JobQueue<'a, 'b> {
let (tx, rx) = channel();
JobQueue {
pool: TaskPool::new(config.jobs()),
tx: tx,
rx: rx,
resolve: resolve,
+ packages: packages,
active: 0,
pending: HashMap::new(),
state: HashMap::new(),
};
// Add the package to the dependency graph
- self.queue.enqueue(&self.resolve, Fresh,
+ self.queue.enqueue(&(self.resolve, self.packages), Fresh,
(pkg.get_package_id(), stage),
(pkg, jobs));
}
}
}
-impl<'a> Dependency<&'a Resolve> for (&'a PackageId, TargetStage) {
-    fn dependencies(&self, resolve: &&'a Resolve)
+// The dependency context is now (Resolve, PackageSet): the resolve graph
+// supplies the edges, while the PackageSet supplies each package's declared
+// dependency entries so transitivity (dev vs normal deps) can be inspected.
+impl<'a> Dependency<(&'a Resolve, &'a PackageSet)>
+    for (&'a PackageId, TargetStage)
+{
+    fn dependencies(&self, &(resolve, packages): &(&'a Resolve, &'a PackageSet))
-> Vec<(&'a PackageId, TargetStage)> {
// This implementation of `Dependency` is the driver for the structure
// of the dependency graph of packages to be built. The "key" here is
// the start state which depends on the ending state of all dependent
// packages (as determined by the resolve context).
let (id, stage) = *self;
+ // The Package for `id`, looked up in the PackageSet; used below to find
+ // the declared dependency entry (and its `is_transitive` flag) by name.
+ // NOTE(review): unwrap assumes every resolved id exists in the set.
+ let pkg = packages.iter().find(|p| p.get_package_id() == id).unwrap();
+ // All resolved dependencies of `id`, excluding any self-edge.
+ let deps = resolve.deps(id).into_iter().flat_map(|a| a)
+ .filter(|dep| *dep != id);
match stage {
StageStart => {
- resolve.deps(id).into_iter().flat_map(|a| a).filter(|dep| {
- *dep != id
+ // Only transitive dependencies are needed to start building a
+ // package. Non transitive dependencies (dev dependencies) are
+ // only used to build tests.
+ deps.filter(|dep| {
+ let dep = pkg.get_dependencies().iter().find(|d| {
+ d.get_name() == dep.get_name()
+ }).unwrap();
+ dep.is_transitive()
}).map(|dep| {
- (dep, StageEnd)
+ (dep, StageLibraries)
}).collect()
}
StageCustomBuild => vec![(id, StageStart)],
StageLibraries => vec![(id, StageCustomBuild)],
StageBinaries => vec![(id, StageLibraries)],
- StageEnd => vec![(id, StageBinaries), (id, StageLibraries)],
+ // Tests depend on this package's own libraries plus the libraries of
+ // every non-transitive (dev) dependency — the inverse of the filter
+ // used for StageStart above.
+ // NOTE(review): the name-based find + unwrap assumes every resolved
+ // dep name appears in get_dependencies() — confirm against Resolve.
+ StageTests => {
+ let mut ret = vec![(id, StageLibraries)];
+ ret.extend(deps.filter(|dep| {
+ let dep = pkg.get_dependencies().iter().find(|d| {
+ d.get_name() == dep.get_name()
+ }).unwrap();
+ !dep.is_transitive()
+ }).map(|dep| {
+ (dep, StageLibraries)
+ }));
+ ret
+ }
}
}
}
use util::{Config, internal, ChainError, Fresh, profile};
use self::job::{Job, Work};
-use self::job_queue::{JobQueue, StageStart, StageCustomBuild, StageLibraries};
-use self::job_queue::{StageBinaries, StageEnd};
+use self::job_queue as jq;
+use self::job_queue::JobQueue;
+use self::context::{Context, PlatformRequirement, PlatformTarget};
+use self::context::{PlatformPlugin, PlatformPluginAndTarget};
pub use self::compilation::Compilation;
pub use self::context::Context;
let mut cx = try!(Context::new(env, resolve, sources, deps, config,
host_layout, target_layout));
- let mut queue = JobQueue::new(cx.resolve, cx.config);
+ let mut queue = JobQueue::new(cx.resolve, deps, cx.config);
// First ensure that the destination directory exists
try!(cx.prepare(pkg));
let (plugin1, plugin2) = fingerprint::prepare_init(cx, pkg, KindPlugin);
init.push((Job::new(plugin1, plugin2, String::new()), Fresh));
}
- jobs.enqueue(pkg, StageStart, init);
+ jobs.enqueue(pkg, jq::StageStart, init);
// First part of the build step of a target is to execute all of the custom
// build commands.
for cmd in build_cmds.into_iter() { try!(cmd()) }
dirty()
};
- jobs.enqueue(pkg, StageCustomBuild, vec![(job(dirty, fresh, desc),
- freshness)]);
+ jobs.enqueue(pkg, jq::StageCustomBuild, vec![(job(dirty, fresh, desc),
+ freshness)]);
// After the custom command has run, execute rustc for all targets of our
// package.
//
// Each target has its own concept of freshness to ensure incremental
// rebuilds on the *target* granularity, not the *package* granularity.
- let (mut libs, mut bins) = (Vec::new(), Vec::new());
+ let (mut libs, mut bins, mut tests) = (Vec::new(), Vec::new(), Vec::new());
for &target in targets.iter() {
let work = if target.get_profile().is_doc() {
let (rustdoc, desc) = try!(rustdoc(pkg, target, cx));
try!(rustc(pkg, target, cx, req))
};
- let dst = if target.is_lib() {&mut libs} else {&mut bins};
+ let dst = match (target.is_lib(), target.get_profile().is_test()) {
+ (_, true) => &mut tests,
+ (true, _) => &mut libs,
+ (false, false) => &mut bins,
+ };
for (work, kind, desc) in work.into_iter() {
let (freshness, dirty, fresh) =
try!(fingerprint::prepare_target(cx, pkg, target, kind));
dst.push((job(dirty, fresh, desc), freshness));
}
}
- jobs.enqueue(pkg, StageLibraries, libs);
- jobs.enqueue(pkg, StageBinaries, bins);
- jobs.enqueue(pkg, StageEnd, Vec::new());
+ jobs.enqueue(pkg, jq::StageLibraries, libs);
+ jobs.enqueue(pkg, jq::StageBinaries, bins);
+ jobs.enqueue(pkg, jq::StageTests, tests);
Ok(())
}
// a second time.
assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
execs().with_stdout(format!("\
-{compiling} bar v0.5.0 ({bar}#[..])
-{compiling} foo v0.5.0 ({url})
+{compiling} [..] v0.5.0 ([..])
+{compiling} [..] v0.5.0 ([..]
{running} target[..]foo-[..]
running 1 test
test result: ok. 1 passed; 0 failed; 0 ignored; 0 measured
-", compiling = COMPILING, url = p.url(), running = RUNNING, bar = p2.url())));
+", compiling = COMPILING, running = RUNNING)));
})
test!(git_build_cmd_freshness {
p2.build();
assert_that(p.cargo_process("test"),
execs().with_stdout(format!("\
-{compiling} bar v0.5.0 ({url})
-{compiling} foo v0.5.0 ({url})
+{compiling} [..] v0.5.0 ({url})
+{compiling} [..] v0.5.0 ({url})
{running} target[..]foo-[..]
running 0 tests
execs().with_stdout("1\n"));
})
+// Regression test: building the library must not be redone by `cargo test`
+// merely because dev-dependencies were compiled in between.
+test!(dev_deps_no_rebuild_lib {
+ let p = project("foo")
+ .file("Cargo.toml", r#"
+ [project]
+ name = "foo"
+ version = "0.5.0"
+ authors = []
+
+ [dev-dependencies.bar]
+ path = "bar"
+
+ [lib]
+ name = "foo"
+ doctest = false
+ "#)
+ .file("src/lib.rs", "")
+ .file("bar/Cargo.toml", r#"
+ [package]
+
+ name = "bar"
+ version = "0.5.0"
+ authors = ["wycats@example.com"]
+ "#)
+ .file("bar/src/lib.rs", "pub fn bar() {}");
+ p.build();
+ // A plain `cargo build` is expected to compile only `foo` itself — the
+ // dev dependency `bar` must not be built for a non-test build.
+ assert_that(p.process(cargo_dir().join("cargo")).arg("build"),
+ execs().with_status(0)
+ .with_stdout(format!("{} foo v0.5.0 ({})\n",
+ COMPILING, p.url())));
+ p.root().move_into_the_past().assert();
+
+ // Now that we've built the library, it *should not* be built again as part
+ // of `cargo test`, even if we have some dev dependencies that weren't
+ // previously built.
+ File::create(&p.root().join("src/lib.rs")).write_str(r#"
+ #[cfg(test)] extern crate bar;
+ #[cfg(not(test))] fn foo() { bar(); }
+ "#).unwrap();
+ p.root().join("src/lib.rs").move_into_the_past().assert();
+
+ // Expect exactly two wildcard "Compiling" lines — presumably `bar` and
+ // the `foo` test target — while the already-fresh lib is reused.
+ assert_that(p.process(cargo_dir().join("cargo")).arg("test"),
+ execs().with_status(0)
+ .with_stdout(format!("\
+{} [..] v0.5.0 ({})
+{} [..] v0.5.0 ({})
+Running target[..]foo-[..]
+
+running 0 tests
+
+test result: ok. 0 passed; 0 failed; 0 ignored; 0 measured
+
+", COMPILING, p.url(), COMPILING, p.url())));
+})